load libraries

In [1]:
import os
import cv2
import glob
import numpy as np
from keras.models import *
from keras.layers import *
from keras.applications import *
from keras.preprocessing.image import *
Using TensorFlow backend.

加载数据集

In [2]:
basedir = "/ext/Data/distracted_driver_detection/"

model_image_size = 299

print("-------- loading train data")
X_train = list()
y_train = list()
for i in range(10):
    dir = os.path.join(basedir, "train", "c%d"%i)
    image_files = glob.glob(os.path.join(dir,"*.jpg"))
    print("loding {}, image count={}".format(dir, len(image_files)))
    for image_file in image_files:
        image = cv2.imread(image_file)
        X_train.append(cv2.resize(image, (model_image_size, model_image_size)))
        label = np.zeros(10, dtype=np.uint8)
        label[i]=1
        y_train.append(label)
X_train = np.array(X_train)
y_train = np.array(y_train)
        
print("-------- loading valid data")
X_valid = list()
y_valid = list()
for i in range(10):
    dir = os.path.join(basedir, "valid", "c%d"%i)
    image_files = glob.glob(os.path.join(dir,"*.jpg"))
    print("loding {}, image count={}".format(dir, len(image_files)))
    for image_file in image_files:
        image = cv2.imread(image_file)
        X_valid.append(cv2.resize(image, (model_image_size, model_image_size)))
        label = np.zeros(10, dtype=np.uint8)
        label[i]=1
        y_valid.append(label)
X_valid = np.array(X_valid)
y_valid = np.array(y_valid)
-------- loading train data
loding /ext/Data/distracted_driver_detection/train/c0, image count=2308
loding /ext/Data/distracted_driver_detection/train/c1, image count=2096
loding /ext/Data/distracted_driver_detection/train/c2, image count=2136
loding /ext/Data/distracted_driver_detection/train/c3, image count=2185
loding /ext/Data/distracted_driver_detection/train/c4, image count=2160
loding /ext/Data/distracted_driver_detection/train/c5, image count=2152
loding /ext/Data/distracted_driver_detection/train/c6, image count=2164
loding /ext/Data/distracted_driver_detection/train/c7, image count=1843
loding /ext/Data/distracted_driver_detection/train/c8, image count=1771
loding /ext/Data/distracted_driver_detection/train/c9, image count=1972
-------- loading valid data
loding /ext/Data/distracted_driver_detection/valid/c0, image count=181
loding /ext/Data/distracted_driver_detection/valid/c1, image count=171
loding /ext/Data/distracted_driver_detection/valid/c2, image count=181
loding /ext/Data/distracted_driver_detection/valid/c3, image count=161
loding /ext/Data/distracted_driver_detection/valid/c4, image count=166
loding /ext/Data/distracted_driver_detection/valid/c5, image count=160
loding /ext/Data/distracted_driver_detection/valid/c6, image count=161
loding /ext/Data/distracted_driver_detection/valid/c7, image count=159
loding /ext/Data/distracted_driver_detection/valid/c8, image count=140
loding /ext/Data/distracted_driver_detection/valid/c9, image count=157

分为训练集和验证集

In [3]:
print(X_train.shape)
print(y_train.shape)
print(X_valid.shape)
print(y_valid.shape)
(20787, 299, 299, 3)
(20787, 10)
(1637, 299, 299, 3)
(1637, 10)
In [4]:
base_model = Xception(input_tensor=Input((model_image_size, model_image_size, 3)), weights='imagenet', include_top=False)

for layers in base_model.layers:
    layers.trainable = False

x = GlobalAveragePooling2D()(base_model.output)
x = Dropout(0.25)(x)
x = Dense(10, activation='softmax')(x)
model = Model(base_model.input, x)
model.compile(optimizer='adadelta', loss='binary_crossentropy', metrics=['accuracy'])

#     model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
print("done")
done
In [5]:
model.summary()
____________________________________________________________________________________________________
Layer (type)                     Output Shape          Param #     Connected to                     
====================================================================================================
input_1 (InputLayer)             (None, 299, 299, 3)   0                                            
____________________________________________________________________________________________________
block1_conv1 (Conv2D)            (None, 149, 149, 32)  864         input_1[0][0]                    
____________________________________________________________________________________________________
block1_conv1_bn (BatchNormalizat (None, 149, 149, 32)  128         block1_conv1[0][0]               
____________________________________________________________________________________________________
block1_conv1_act (Activation)    (None, 149, 149, 32)  0           block1_conv1_bn[0][0]            
____________________________________________________________________________________________________
block1_conv2 (Conv2D)            (None, 147, 147, 64)  18432       block1_conv1_act[0][0]           
____________________________________________________________________________________________________
block1_conv2_bn (BatchNormalizat (None, 147, 147, 64)  256         block1_conv2[0][0]               
____________________________________________________________________________________________________
block1_conv2_act (Activation)    (None, 147, 147, 64)  0           block1_conv2_bn[0][0]            
____________________________________________________________________________________________________
block2_sepconv1 (SeparableConv2D (None, 147, 147, 128) 8768        block1_conv2_act[0][0]           
____________________________________________________________________________________________________
block2_sepconv1_bn (BatchNormali (None, 147, 147, 128) 512         block2_sepconv1[0][0]            
____________________________________________________________________________________________________
block2_sepconv2_act (Activation) (None, 147, 147, 128) 0           block2_sepconv1_bn[0][0]         
____________________________________________________________________________________________________
block2_sepconv2 (SeparableConv2D (None, 147, 147, 128) 17536       block2_sepconv2_act[0][0]        
____________________________________________________________________________________________________
block2_sepconv2_bn (BatchNormali (None, 147, 147, 128) 512         block2_sepconv2[0][0]            
____________________________________________________________________________________________________
conv2d_1 (Conv2D)                (None, 74, 74, 128)   8192        block1_conv2_act[0][0]           
____________________________________________________________________________________________________
block2_pool (MaxPooling2D)       (None, 74, 74, 128)   0           block2_sepconv2_bn[0][0]         
____________________________________________________________________________________________________
batch_normalization_1 (BatchNorm (None, 74, 74, 128)   512         conv2d_1[0][0]                   
____________________________________________________________________________________________________
add_1 (Add)                      (None, 74, 74, 128)   0           block2_pool[0][0]                
                                                                   batch_normalization_1[0][0]      
____________________________________________________________________________________________________
block3_sepconv1_act (Activation) (None, 74, 74, 128)   0           add_1[0][0]                      
____________________________________________________________________________________________________
block3_sepconv1 (SeparableConv2D (None, 74, 74, 256)   33920       block3_sepconv1_act[0][0]        
____________________________________________________________________________________________________
block3_sepconv1_bn (BatchNormali (None, 74, 74, 256)   1024        block3_sepconv1[0][0]            
____________________________________________________________________________________________________
block3_sepconv2_act (Activation) (None, 74, 74, 256)   0           block3_sepconv1_bn[0][0]         
____________________________________________________________________________________________________
block3_sepconv2 (SeparableConv2D (None, 74, 74, 256)   67840       block3_sepconv2_act[0][0]        
____________________________________________________________________________________________________
block3_sepconv2_bn (BatchNormali (None, 74, 74, 256)   1024        block3_sepconv2[0][0]            
____________________________________________________________________________________________________
conv2d_2 (Conv2D)                (None, 37, 37, 256)   32768       add_1[0][0]                      
____________________________________________________________________________________________________
block3_pool (MaxPooling2D)       (None, 37, 37, 256)   0           block3_sepconv2_bn[0][0]         
____________________________________________________________________________________________________
batch_normalization_2 (BatchNorm (None, 37, 37, 256)   1024        conv2d_2[0][0]                   
____________________________________________________________________________________________________
add_2 (Add)                      (None, 37, 37, 256)   0           block3_pool[0][0]                
                                                                   batch_normalization_2[0][0]      
____________________________________________________________________________________________________
block4_sepconv1_act (Activation) (None, 37, 37, 256)   0           add_2[0][0]                      
____________________________________________________________________________________________________
block4_sepconv1 (SeparableConv2D (None, 37, 37, 728)   188672      block4_sepconv1_act[0][0]        
____________________________________________________________________________________________________
block4_sepconv1_bn (BatchNormali (None, 37, 37, 728)   2912        block4_sepconv1[0][0]            
____________________________________________________________________________________________________
block4_sepconv2_act (Activation) (None, 37, 37, 728)   0           block4_sepconv1_bn[0][0]         
____________________________________________________________________________________________________
block4_sepconv2 (SeparableConv2D (None, 37, 37, 728)   536536      block4_sepconv2_act[0][0]        
____________________________________________________________________________________________________
block4_sepconv2_bn (BatchNormali (None, 37, 37, 728)   2912        block4_sepconv2[0][0]            
____________________________________________________________________________________________________
conv2d_3 (Conv2D)                (None, 19, 19, 728)   186368      add_2[0][0]                      
____________________________________________________________________________________________________
block4_pool (MaxPooling2D)       (None, 19, 19, 728)   0           block4_sepconv2_bn[0][0]         
____________________________________________________________________________________________________
batch_normalization_3 (BatchNorm (None, 19, 19, 728)   2912        conv2d_3[0][0]                   
____________________________________________________________________________________________________
add_3 (Add)                      (None, 19, 19, 728)   0           block4_pool[0][0]                
                                                                   batch_normalization_3[0][0]      
____________________________________________________________________________________________________
block5_sepconv1_act (Activation) (None, 19, 19, 728)   0           add_3[0][0]                      
____________________________________________________________________________________________________
block5_sepconv1 (SeparableConv2D (None, 19, 19, 728)   536536      block5_sepconv1_act[0][0]        
____________________________________________________________________________________________________
block5_sepconv1_bn (BatchNormali (None, 19, 19, 728)   2912        block5_sepconv1[0][0]            
____________________________________________________________________________________________________
block5_sepconv2_act (Activation) (None, 19, 19, 728)   0           block5_sepconv1_bn[0][0]         
____________________________________________________________________________________________________
block5_sepconv2 (SeparableConv2D (None, 19, 19, 728)   536536      block5_sepconv2_act[0][0]        
____________________________________________________________________________________________________
block5_sepconv2_bn (BatchNormali (None, 19, 19, 728)   2912        block5_sepconv2[0][0]            
____________________________________________________________________________________________________
block5_sepconv3_act (Activation) (None, 19, 19, 728)   0           block5_sepconv2_bn[0][0]         
____________________________________________________________________________________________________
block5_sepconv3 (SeparableConv2D (None, 19, 19, 728)   536536      block5_sepconv3_act[0][0]        
____________________________________________________________________________________________________
block5_sepconv3_bn (BatchNormali (None, 19, 19, 728)   2912        block5_sepconv3[0][0]            
____________________________________________________________________________________________________
add_4 (Add)                      (None, 19, 19, 728)   0           block5_sepconv3_bn[0][0]         
                                                                   add_3[0][0]                      
____________________________________________________________________________________________________
block6_sepconv1_act (Activation) (None, 19, 19, 728)   0           add_4[0][0]                      
____________________________________________________________________________________________________
block6_sepconv1 (SeparableConv2D (None, 19, 19, 728)   536536      block6_sepconv1_act[0][0]        
____________________________________________________________________________________________________
block6_sepconv1_bn (BatchNormali (None, 19, 19, 728)   2912        block6_sepconv1[0][0]            
____________________________________________________________________________________________________
block6_sepconv2_act (Activation) (None, 19, 19, 728)   0           block6_sepconv1_bn[0][0]         
____________________________________________________________________________________________________
block6_sepconv2 (SeparableConv2D (None, 19, 19, 728)   536536      block6_sepconv2_act[0][0]        
____________________________________________________________________________________________________
block6_sepconv2_bn (BatchNormali (None, 19, 19, 728)   2912        block6_sepconv2[0][0]            
____________________________________________________________________________________________________
block6_sepconv3_act (Activation) (None, 19, 19, 728)   0           block6_sepconv2_bn[0][0]         
____________________________________________________________________________________________________
block6_sepconv3 (SeparableConv2D (None, 19, 19, 728)   536536      block6_sepconv3_act[0][0]        
____________________________________________________________________________________________________
block6_sepconv3_bn (BatchNormali (None, 19, 19, 728)   2912        block6_sepconv3[0][0]            
____________________________________________________________________________________________________
add_5 (Add)                      (None, 19, 19, 728)   0           block6_sepconv3_bn[0][0]         
                                                                   add_4[0][0]                      
____________________________________________________________________________________________________
block7_sepconv1_act (Activation) (None, 19, 19, 728)   0           add_5[0][0]                      
____________________________________________________________________________________________________
block7_sepconv1 (SeparableConv2D (None, 19, 19, 728)   536536      block7_sepconv1_act[0][0]        
____________________________________________________________________________________________________
block7_sepconv1_bn (BatchNormali (None, 19, 19, 728)   2912        block7_sepconv1[0][0]            
____________________________________________________________________________________________________
block7_sepconv2_act (Activation) (None, 19, 19, 728)   0           block7_sepconv1_bn[0][0]         
____________________________________________________________________________________________________
block7_sepconv2 (SeparableConv2D (None, 19, 19, 728)   536536      block7_sepconv2_act[0][0]        
____________________________________________________________________________________________________
block7_sepconv2_bn (BatchNormali (None, 19, 19, 728)   2912        block7_sepconv2[0][0]            
____________________________________________________________________________________________________
block7_sepconv3_act (Activation) (None, 19, 19, 728)   0           block7_sepconv2_bn[0][0]         
____________________________________________________________________________________________________
block7_sepconv3 (SeparableConv2D (None, 19, 19, 728)   536536      block7_sepconv3_act[0][0]        
____________________________________________________________________________________________________
block7_sepconv3_bn (BatchNormali (None, 19, 19, 728)   2912        block7_sepconv3[0][0]            
____________________________________________________________________________________________________
add_6 (Add)                      (None, 19, 19, 728)   0           block7_sepconv3_bn[0][0]         
                                                                   add_5[0][0]                      
____________________________________________________________________________________________________
block8_sepconv1_act (Activation) (None, 19, 19, 728)   0           add_6[0][0]                      
____________________________________________________________________________________________________
block8_sepconv1 (SeparableConv2D (None, 19, 19, 728)   536536      block8_sepconv1_act[0][0]        
____________________________________________________________________________________________________
block8_sepconv1_bn (BatchNormali (None, 19, 19, 728)   2912        block8_sepconv1[0][0]            
____________________________________________________________________________________________________
block8_sepconv2_act (Activation) (None, 19, 19, 728)   0           block8_sepconv1_bn[0][0]         
____________________________________________________________________________________________________
block8_sepconv2 (SeparableConv2D (None, 19, 19, 728)   536536      block8_sepconv2_act[0][0]        
____________________________________________________________________________________________________
block8_sepconv2_bn (BatchNormali (None, 19, 19, 728)   2912        block8_sepconv2[0][0]            
____________________________________________________________________________________________________
block8_sepconv3_act (Activation) (None, 19, 19, 728)   0           block8_sepconv2_bn[0][0]         
____________________________________________________________________________________________________
block8_sepconv3 (SeparableConv2D (None, 19, 19, 728)   536536      block8_sepconv3_act[0][0]        
____________________________________________________________________________________________________
block8_sepconv3_bn (BatchNormali (None, 19, 19, 728)   2912        block8_sepconv3[0][0]            
____________________________________________________________________________________________________
add_7 (Add)                      (None, 19, 19, 728)   0           block8_sepconv3_bn[0][0]         
                                                                   add_6[0][0]                      
____________________________________________________________________________________________________
block9_sepconv1_act (Activation) (None, 19, 19, 728)   0           add_7[0][0]                      
____________________________________________________________________________________________________
block9_sepconv1 (SeparableConv2D (None, 19, 19, 728)   536536      block9_sepconv1_act[0][0]        
____________________________________________________________________________________________________
block9_sepconv1_bn (BatchNormali (None, 19, 19, 728)   2912        block9_sepconv1[0][0]            
____________________________________________________________________________________________________
block9_sepconv2_act (Activation) (None, 19, 19, 728)   0           block9_sepconv1_bn[0][0]         
____________________________________________________________________________________________________
block9_sepconv2 (SeparableConv2D (None, 19, 19, 728)   536536      block9_sepconv2_act[0][0]        
____________________________________________________________________________________________________
block9_sepconv2_bn (BatchNormali (None, 19, 19, 728)   2912        block9_sepconv2[0][0]            
____________________________________________________________________________________________________
block9_sepconv3_act (Activation) (None, 19, 19, 728)   0           block9_sepconv2_bn[0][0]         
____________________________________________________________________________________________________
block9_sepconv3 (SeparableConv2D (None, 19, 19, 728)   536536      block9_sepconv3_act[0][0]        
____________________________________________________________________________________________________
block9_sepconv3_bn (BatchNormali (None, 19, 19, 728)   2912        block9_sepconv3[0][0]            
____________________________________________________________________________________________________
add_8 (Add)                      (None, 19, 19, 728)   0           block9_sepconv3_bn[0][0]         
                                                                   add_7[0][0]                      
____________________________________________________________________________________________________
block10_sepconv1_act (Activation (None, 19, 19, 728)   0           add_8[0][0]                      
____________________________________________________________________________________________________
block10_sepconv1 (SeparableConv2 (None, 19, 19, 728)   536536      block10_sepconv1_act[0][0]       
____________________________________________________________________________________________________
block10_sepconv1_bn (BatchNormal (None, 19, 19, 728)   2912        block10_sepconv1[0][0]           
____________________________________________________________________________________________________
block10_sepconv2_act (Activation (None, 19, 19, 728)   0           block10_sepconv1_bn[0][0]        
____________________________________________________________________________________________________
block10_sepconv2 (SeparableConv2 (None, 19, 19, 728)   536536      block10_sepconv2_act[0][0]       
____________________________________________________________________________________________________
block10_sepconv2_bn (BatchNormal (None, 19, 19, 728)   2912        block10_sepconv2[0][0]           
____________________________________________________________________________________________________
block10_sepconv3_act (Activation (None, 19, 19, 728)   0           block10_sepconv2_bn[0][0]        
____________________________________________________________________________________________________
block10_sepconv3 (SeparableConv2 (None, 19, 19, 728)   536536      block10_sepconv3_act[0][0]       
____________________________________________________________________________________________________
block10_sepconv3_bn (BatchNormal (None, 19, 19, 728)   2912        block10_sepconv3[0][0]           
____________________________________________________________________________________________________
add_9 (Add)                      (None, 19, 19, 728)   0           block10_sepconv3_bn[0][0]        
                                                                   add_8[0][0]                      
____________________________________________________________________________________________________
block11_sepconv1_act (Activation (None, 19, 19, 728)   0           add_9[0][0]                      
____________________________________________________________________________________________________
block11_sepconv1 (SeparableConv2 (None, 19, 19, 728)   536536      block11_sepconv1_act[0][0]       
____________________________________________________________________________________________________
block11_sepconv1_bn (BatchNormal (None, 19, 19, 728)   2912        block11_sepconv1[0][0]           
____________________________________________________________________________________________________
block11_sepconv2_act (Activation (None, 19, 19, 728)   0           block11_sepconv1_bn[0][0]        
____________________________________________________________________________________________________
block11_sepconv2 (SeparableConv2 (None, 19, 19, 728)   536536      block11_sepconv2_act[0][0]       
____________________________________________________________________________________________________
block11_sepconv2_bn (BatchNormal (None, 19, 19, 728)   2912        block11_sepconv2[0][0]           
____________________________________________________________________________________________________
block11_sepconv3_act (Activation (None, 19, 19, 728)   0           block11_sepconv2_bn[0][0]        
____________________________________________________________________________________________________
block11_sepconv3 (SeparableConv2 (None, 19, 19, 728)   536536      block11_sepconv3_act[0][0]       
____________________________________________________________________________________________________
block11_sepconv3_bn (BatchNormal (None, 19, 19, 728)   2912        block11_sepconv3[0][0]           
____________________________________________________________________________________________________
add_10 (Add)                     (None, 19, 19, 728)   0           block11_sepconv3_bn[0][0]        
                                                                   add_9[0][0]                      
____________________________________________________________________________________________________
block12_sepconv1_act (Activation (None, 19, 19, 728)   0           add_10[0][0]                     
____________________________________________________________________________________________________
block12_sepconv1 (SeparableConv2 (None, 19, 19, 728)   536536      block12_sepconv1_act[0][0]       
____________________________________________________________________________________________________
block12_sepconv1_bn (BatchNormal (None, 19, 19, 728)   2912        block12_sepconv1[0][0]           
____________________________________________________________________________________________________
block12_sepconv2_act (Activation (None, 19, 19, 728)   0           block12_sepconv1_bn[0][0]        
____________________________________________________________________________________________________
block12_sepconv2 (SeparableConv2 (None, 19, 19, 728)   536536      block12_sepconv2_act[0][0]       
____________________________________________________________________________________________________
block12_sepconv2_bn (BatchNormal (None, 19, 19, 728)   2912        block12_sepconv2[0][0]           
____________________________________________________________________________________________________
block12_sepconv3_act (Activation (None, 19, 19, 728)   0           block12_sepconv2_bn[0][0]        
____________________________________________________________________________________________________
block12_sepconv3 (SeparableConv2 (None, 19, 19, 728)   536536      block12_sepconv3_act[0][0]       
____________________________________________________________________________________________________
block12_sepconv3_bn (BatchNormal (None, 19, 19, 728)   2912        block12_sepconv3[0][0]           
____________________________________________________________________________________________________
add_11 (Add)                     (None, 19, 19, 728)   0           block12_sepconv3_bn[0][0]        
                                                                   add_10[0][0]                     
____________________________________________________________________________________________________
block13_sepconv1_act (Activation (None, 19, 19, 728)   0           add_11[0][0]                     
____________________________________________________________________________________________________
block13_sepconv1 (SeparableConv2 (None, 19, 19, 728)   536536      block13_sepconv1_act[0][0]       
____________________________________________________________________________________________________
block13_sepconv1_bn (BatchNormal (None, 19, 19, 728)   2912        block13_sepconv1[0][0]           
____________________________________________________________________________________________________
block13_sepconv2_act (Activation (None, 19, 19, 728)   0           block13_sepconv1_bn[0][0]        
____________________________________________________________________________________________________
block13_sepconv2 (SeparableConv2 (None, 19, 19, 1024)  752024      block13_sepconv2_act[0][0]       
____________________________________________________________________________________________________
block13_sepconv2_bn (BatchNormal (None, 19, 19, 1024)  4096        block13_sepconv2[0][0]           
____________________________________________________________________________________________________
conv2d_4 (Conv2D)                (None, 10, 10, 1024)  745472      add_11[0][0]                     
____________________________________________________________________________________________________
block13_pool (MaxPooling2D)      (None, 10, 10, 1024)  0           block13_sepconv2_bn[0][0]        
____________________________________________________________________________________________________
batch_normalization_4 (BatchNorm (None, 10, 10, 1024)  4096        conv2d_4[0][0]                   
____________________________________________________________________________________________________
add_12 (Add)                     (None, 10, 10, 1024)  0           block13_pool[0][0]               
                                                                   batch_normalization_4[0][0]      
____________________________________________________________________________________________________
block14_sepconv1 (SeparableConv2 (None, 10, 10, 1536)  1582080     add_12[0][0]                     
____________________________________________________________________________________________________
block14_sepconv1_bn (BatchNormal (None, 10, 10, 1536)  6144        block14_sepconv1[0][0]           
____________________________________________________________________________________________________
block14_sepconv1_act (Activation (None, 10, 10, 1536)  0           block14_sepconv1_bn[0][0]        
____________________________________________________________________________________________________
block14_sepconv2 (SeparableConv2 (None, 10, 10, 2048)  3159552     block14_sepconv1_act[0][0]       
____________________________________________________________________________________________________
block14_sepconv2_bn (BatchNormal (None, 10, 10, 2048)  8192        block14_sepconv2[0][0]           
____________________________________________________________________________________________________
block14_sepconv2_act (Activation (None, 10, 10, 2048)  0           block14_sepconv2_bn[0][0]        
____________________________________________________________________________________________________
global_average_pooling2d_1 (Glob (None, 2048)          0           block14_sepconv2_act[0][0]       
____________________________________________________________________________________________________
dropout_1 (Dropout)              (None, 2048)          0           global_average_pooling2d_1[0][0] 
____________________________________________________________________________________________________
dense_1 (Dense)                  (None, 10)            20490       dropout_1[0][0]                  
====================================================================================================
Total params: 20,881,970
Trainable params: 20,490
Non-trainable params: 20,861,480
____________________________________________________________________________________________________

训练模型

In [6]:
model.fit(X_train, y_train, batch_size=16, epochs=10, validation_data=(X_valid, y_valid))

model.save("models/resnet50-mymodel.h5")
Train on 20787 samples, validate on 1637 samples
Epoch 1/10
20787/20787 [==============================] - 188s - loss: 0.2563 - acc: 0.9037 - val_loss: 0.2671 - val_acc: 0.9017
Epoch 2/10
20787/20787 [==============================] - 182s - loss: 0.1810 - acc: 0.9268 - val_loss: 0.2505 - val_acc: 0.9062
Epoch 3/10
20787/20787 [==============================] - 182s - loss: 0.1485 - acc: 0.9442 - val_loss: 0.2450 - val_acc: 0.9081
Epoch 4/10
20787/20787 [==============================] - 181s - loss: 0.1302 - acc: 0.9529 - val_loss: 0.2363 - val_acc: 0.9091
Epoch 5/10
20787/20787 [==============================] - 181s - loss: 0.1167 - acc: 0.9592 - val_loss: 0.2397 - val_acc: 0.9100
Epoch 6/10
20787/20787 [==============================] - 180s - loss: 0.1084 - acc: 0.9622 - val_loss: 0.2443 - val_acc: 0.9045
Epoch 7/10
20787/20787 [==============================] - 181s - loss: 0.1020 - acc: 0.9655 - val_loss: 0.2533 - val_acc: 0.9042
Epoch 8/10
20787/20787 [==============================] - 180s - loss: 0.0968 - acc: 0.9671 - val_loss: 0.2495 - val_acc: 0.9028
Epoch 9/10
20787/20787 [==============================] - 177s - loss: 0.0928 - acc: 0.9686 - val_loss: 0.2457 - val_acc: 0.9043
Epoch 10/10
20787/20787 [==============================] - 177s - loss: 0.0895 - acc: 0.9699 - val_loss: 0.2496 - val_acc: 0.9029
In [7]:
from IPython.display import SVG
from keras.utils.vis_utils import model_to_dot
from keras.models import *

model = load_model("models/resnet50-mymodel.h5")
print("load successed")

SVG(model_to_dot(model).create(prog='dot', format='svg'))
load successed
Out[7]:
G 139854740149024 input_1: InputLayer 139854740149136 block1_conv1: Conv2D 139854740149024->139854740149136 139854740149920 block1_conv1_bn: BatchNormalization 139854740149136->139854740149920 139854740149248 block1_conv1_act: Activation 139854740149920->139854740149248 139854740150312 block1_conv2: Conv2D 139854740149248->139854740150312 139854740150200 block1_conv2_bn: BatchNormalization 139854740150312->139854740150200 139854740151208 block1_conv2_act: Activation 139854740150200->139854740151208 139854740151712 block2_sepconv1: SeparableConv2D 139854740151208->139854740151712 139854740186672 conv2d_1: Conv2D 139854740151208->139854740186672 139854740151768 block2_sepconv1_bn: BatchNormalization 139854740151712->139854740151768 139854740185608 block2_sepconv2_act: Activation 139854740151768->139854740185608 139854740185664 block2_sepconv2: SeparableConv2D 139854740185608->139854740185664 139854740186280 block2_sepconv2_bn: BatchNormalization 139854740185664->139854740186280 139854740187008 block2_pool: MaxPooling2D 139854740186280->139854740187008 139854740187232 batch_normalization_1: BatchNormalization 139854740186672->139854740187232 139854740187512 add_1: Add 139854740187008->139854740187512 139854740187232->139854740187512 139854740187568 block3_sepconv1_act: Activation 139854740187512->139854740187568 139854740210240 conv2d_2: Conv2D 139854740187512->139854740210240 139854740187624 block3_sepconv1: SeparableConv2D 139854740187568->139854740187624 139854740188240 block3_sepconv1_bn: BatchNormalization 139854740187624->139854740188240 139854740188632 block3_sepconv2_act: Activation 139854740188240->139854740188632 139854740188688 block3_sepconv2: SeparableConv2D 139854740188632->139854740188688 139854740152216 block3_sepconv2_bn: BatchNormalization 139854740188688->139854740152216 139854740210576 block3_pool: MaxPooling2D 139854740152216->139854740210576 139854740210800 batch_normalization_2: BatchNormalization 139854740210240->139854740210800 139854740211080 add_2: Add 139854740210576->139854740211080 139854740210800->139854740211080 139854740211136 block4_sepconv1_act: Activation 139854740211080->139854740211136 139854740213264 conv2d_3: Conv2D 139854740211080->139854740213264 139854740211192 block4_sepconv1: SeparableConv2D 139854740211136->139854740211192 139854740211808 block4_sepconv1_bn: BatchNormalization 139854740211192->139854740211808 139854740212200 block4_sepconv2_act: Activation 139854740211808->139854740212200 139854740212256 block4_sepconv2: SeparableConv2D 139854740212200->139854740212256 139854740212872 block4_sepconv2_bn: BatchNormalization 139854740212256->139854740212872 139854740213600 block4_pool: MaxPooling2D 139854740212872->139854740213600 139854740226176 batch_normalization_3: BatchNormalization 139854740213264->139854740226176 139854740226456 add_3: Add 139854740213600->139854740226456 139854740226176->139854740226456 139854740226512 block5_sepconv1_act: Activation 139854740226456->139854740226512 139854740229704 add_4: Add 139854740226456->139854740229704 139854740226568 block5_sepconv1: SeparableConv2D 139854740226512->139854740226568 139854740227184 block5_sepconv1_bn: BatchNormalization 139854740226568->139854740227184 139854740227576 block5_sepconv2_act: Activation 139854740227184->139854740227576 139854740227632 block5_sepconv2: SeparableConv2D 139854740227576->139854740227632 139854740228248 block5_sepconv2_bn: BatchNormalization 139854740227632->139854740228248 139854740228640 block5_sepconv3_act: Activation 139854740228248->139854740228640 139854740228696 block5_sepconv3: SeparableConv2D 139854740228640->139854740228696 139854740229312 block5_sepconv3_bn: BatchNormalization 139854740228696->139854740229312 139854740229312->139854740229704 139854740229760 block6_sepconv1_act: Activation 139854740229704->139854740229760 139854740261688 add_5: Add 139854740229704->139854740261688 139854740229816 block6_sepconv1: SeparableConv2D 139854740229760->139854740229816 139854740189136 block6_sepconv1_bn: BatchNormalization 139854740229816->139854740189136 139854740259560 block6_sepconv2_act: Activation 139854740189136->139854740259560 139854740259616 block6_sepconv2: SeparableConv2D 139854740259560->139854740259616 139854740260232 block6_sepconv2_bn: BatchNormalization 139854740259616->139854740260232 139854740260624 block6_sepconv3_act: Activation 139854740260232->139854740260624 139854740260680 block6_sepconv3: SeparableConv2D 139854740260624->139854740260680 139854740261296 block6_sepconv3_bn: BatchNormalization 139854740260680->139854740261296 139854740261296->139854740261688 139854740261744 block7_sepconv1_act: Activation 139854740261688->139854740261744 139854740285480 add_6: Add 139854740261688->139854740285480 139854740261800 block7_sepconv1: SeparableConv2D 139854740261744->139854740261800 139854740262416 block7_sepconv1_bn: BatchNormalization 139854740261800->139854740262416 139854740262808 block7_sepconv2_act: Activation 139854740262416->139854740262808 139854740229928 block7_sepconv2: SeparableConv2D 139854740262808->139854740229928 139854740284024 block7_sepconv2_bn: BatchNormalization 139854740229928->139854740284024 139854740284416 block7_sepconv3_act: Activation 139854740284024->139854740284416 139854740284472 block7_sepconv3: SeparableConv2D 139854740284416->139854740284472 139854740285088 block7_sepconv3_bn: BatchNormalization 139854740284472->139854740285088 139854740285088->139854740285480 139854740285536 block8_sepconv1_act: Activation 139854740285480->139854740285536 139854740276504 add_7: Add 139854740285480->139854740276504 139854740285592 block8_sepconv1: SeparableConv2D 139854740285536->139854740285592 139854740286208 block8_sepconv1_bn: BatchNormalization 139854740285592->139854740286208 139854740286600 block8_sepconv2_act: Activation 139854740286208->139854740286600 139854740286656 block8_sepconv2: SeparableConv2D 139854740286600->139854740286656 139854740287272 block8_sepconv2_bn: BatchNormalization 139854740286656->139854740287272 139854740262864 block8_sepconv3_act: Activation 139854740287272->139854740262864 139854740275496 block8_sepconv3: SeparableConv2D 139854740262864->139854740275496 139854740276112 block8_sepconv3_bn: BatchNormalization 139854740275496->139854740276112 139854740276112->139854740276504 139854740276560 block9_sepconv1_act: Activation 139854740276504->139854740276560 139854739554824 add_8: Add 139854740276504->139854739554824 139854740276616 block9_sepconv1: SeparableConv2D 139854740276560->139854740276616 139854740277232 block9_sepconv1_bn: BatchNormalization 139854740276616->139854740277232 139854740277624 block9_sepconv2_act: Activation 139854740277232->139854740277624 139854740277680 block9_sepconv2: SeparableConv2D 139854740277624->139854740277680 139854740278296 block9_sepconv2_bn: BatchNormalization 139854740277680->139854740278296 139854740278688 block9_sepconv3_act: Activation 139854740278296->139854740278688 139854740278744 block9_sepconv3: SeparableConv2D 139854740278688->139854740278744 139854740287440 block9_sepconv3_bn: BatchNormalization 139854740278744->139854740287440 139854740287440->139854739554824 139854739554880 block10_sepconv1_act: Activation 139854739554824->139854739554880 139854739558072 add_9: Add 139854739554824->139854739558072 139854739554936 block10_sepconv1: SeparableConv2D 139854739554880->139854739554936 139854739555552 block10_sepconv1_bn: BatchNormalization 139854739554936->139854739555552 139854739555944 block10_sepconv2_act: Activation 139854739555552->139854739555944 139854739556000 block10_sepconv2: SeparableConv2D 139854739555944->139854739556000 139854739556616 block10_sepconv2_bn: BatchNormalization 139854739556000->139854739556616 139854739557008 block10_sepconv3_act: Activation 139854739556616->139854739557008 139854739557064 block10_sepconv3: SeparableConv2D 139854739557008->139854739557064 139854739557680 block10_sepconv3_bn: BatchNormalization 139854739557064->139854739557680 139854739557680->139854739558072 139854739558128 block11_sepconv1_act: Activation 139854739558072->139854739558128 139854739585960 add_10: Add 139854739558072->139854739585960 139854739558184 block11_sepconv1: SeparableConv2D 139854739558128->139854739558184 139854740279192 block11_sepconv1_bn: BatchNormalization 139854739558184->139854740279192 139854739583832 block11_sepconv2_act: Activation 139854740279192->139854739583832 139854739583888 block11_sepconv2: SeparableConv2D 139854739583832->139854739583888 139854739584504 block11_sepconv2_bn: BatchNormalization 139854739583888->139854739584504 139854739584896 block11_sepconv3_act: Activation 139854739584504->139854739584896 139854739584952 block11_sepconv3: SeparableConv2D 139854739584896->139854739584952 139854739585568 block11_sepconv3_bn: BatchNormalization 139854739584952->139854739585568 139854739585568->139854739585960 139854739586016 block12_sepconv1_act: Activation 139854739585960->139854739586016 139854739597464 add_11: Add 139854739585960->139854739597464 139854739586072 block12_sepconv1: SeparableConv2D 139854739586016->139854739586072 139854739586688 block12_sepconv1_bn: BatchNormalization 139854739586072->139854739586688 139854739558296 block12_sepconv2_act: Activation 139854739586688->139854739558296 139854739595392 block12_sepconv2: SeparableConv2D 139854739558296->139854739595392 139854739596008 block12_sepconv2_bn: BatchNormalization 139854739595392->139854739596008 139854739596400 block12_sepconv3_act: Activation 139854739596008->139854739596400 139854739596456 block12_sepconv3: SeparableConv2D 139854739596400->139854739596456 139854739597072 block12_sepconv3_bn: BatchNormalization 139854739596456->139854739597072 139854739597072->139854739597464 139854739597520 block13_sepconv1_act: Activation 139854739597464->139854739597520 139854739616096 conv2d_4: Conv2D 139854739597464->139854739616096 139854739597576 block13_sepconv1: SeparableConv2D 139854739597520->139854739597576 139854739598192 block13_sepconv1_bn: BatchNormalization 139854739597576->139854739598192 139854739598584 block13_sepconv2_act: Activation 139854739598192->139854739598584 139854739598640 block13_sepconv2: SeparableConv2D 139854739598584->139854739598640 139854739587024 block13_sepconv2_bn: BatchNormalization 139854739598640->139854739587024 139854739616432 block13_pool: MaxPooling2D 139854739587024->139854739616432 139854739616656 batch_normalization_4: BatchNormalization 139854739616096->139854739616656 139854739616936 add_12: Add 139854739616432->139854739616936 139854739616656->139854739616936 139854739616992 block14_sepconv1: SeparableConv2D 139854739616936->139854739616992 139854739617608 block14_sepconv1_bn: BatchNormalization 139854739616992->139854739617608 139854739618000 block14_sepconv1_act: Activation 139854739617608->139854739618000 139854739618056 block14_sepconv2: SeparableConv2D 139854739618000->139854739618056 139854739618672 block14_sepconv2_bn: BatchNormalization 139854739618056->139854739618672 139854739619064 block14_sepconv2_act: Activation 139854739618672->139854739619064 139854739619120 global_average_pooling2d_1: GlobalAveragePooling2D 139854739619064->139854739619120 139854739619232 dropout_1: Dropout 139854739619120->139854739619232 139854739619288 dense_1: Dense 139854739619232->139854739619288

CAM 可视化

http://cnnlocalization.csail.mit.edu/

$cam = (P-0.5)*w*output$

  • cam: 类激活图
  • P: 概率
  • output: 卷积层的输出 2048*1
  • w: 卷积核的权重 x*x*2048
In [8]:
z = zip([x.name for x in model.layers], range(len(model.layers)))
for k, v in z:
    print("{} - {}".format(k,v))
input_1 - 0
block1_conv1 - 1
block1_conv1_bn - 2
block1_conv1_act - 3
block1_conv2 - 4
block1_conv2_bn - 5
block1_conv2_act - 6
block2_sepconv1 - 7
block2_sepconv1_bn - 8
block2_sepconv2_act - 9
block2_sepconv2 - 10
block2_sepconv2_bn - 11
conv2d_1 - 12
block2_pool - 13
batch_normalization_1 - 14
add_1 - 15
block3_sepconv1_act - 16
block3_sepconv1 - 17
block3_sepconv1_bn - 18
block3_sepconv2_act - 19
block3_sepconv2 - 20
block3_sepconv2_bn - 21
conv2d_2 - 22
block3_pool - 23
batch_normalization_2 - 24
add_2 - 25
block4_sepconv1_act - 26
block4_sepconv1 - 27
block4_sepconv1_bn - 28
block4_sepconv2_act - 29
block4_sepconv2 - 30
block4_sepconv2_bn - 31
conv2d_3 - 32
block4_pool - 33
batch_normalization_3 - 34
add_3 - 35
block5_sepconv1_act - 36
block5_sepconv1 - 37
block5_sepconv1_bn - 38
block5_sepconv2_act - 39
block5_sepconv2 - 40
block5_sepconv2_bn - 41
block5_sepconv3_act - 42
block5_sepconv3 - 43
block5_sepconv3_bn - 44
add_4 - 45
block6_sepconv1_act - 46
block6_sepconv1 - 47
block6_sepconv1_bn - 48
block6_sepconv2_act - 49
block6_sepconv2 - 50
block6_sepconv2_bn - 51
block6_sepconv3_act - 52
block6_sepconv3 - 53
block6_sepconv3_bn - 54
add_5 - 55
block7_sepconv1_act - 56
block7_sepconv1 - 57
block7_sepconv1_bn - 58
block7_sepconv2_act - 59
block7_sepconv2 - 60
block7_sepconv2_bn - 61
block7_sepconv3_act - 62
block7_sepconv3 - 63
block7_sepconv3_bn - 64
add_6 - 65
block8_sepconv1_act - 66
block8_sepconv1 - 67
block8_sepconv1_bn - 68
block8_sepconv2_act - 69
block8_sepconv2 - 70
block8_sepconv2_bn - 71
block8_sepconv3_act - 72
block8_sepconv3 - 73
block8_sepconv3_bn - 74
add_7 - 75
block9_sepconv1_act - 76
block9_sepconv1 - 77
block9_sepconv1_bn - 78
block9_sepconv2_act - 79
block9_sepconv2 - 80
block9_sepconv2_bn - 81
block9_sepconv3_act - 82
block9_sepconv3 - 83
block9_sepconv3_bn - 84
add_8 - 85
block10_sepconv1_act - 86
block10_sepconv1 - 87
block10_sepconv1_bn - 88
block10_sepconv2_act - 89
block10_sepconv2 - 90
block10_sepconv2_bn - 91
block10_sepconv3_act - 92
block10_sepconv3 - 93
block10_sepconv3_bn - 94
add_9 - 95
block11_sepconv1_act - 96
block11_sepconv1 - 97
block11_sepconv1_bn - 98
block11_sepconv2_act - 99
block11_sepconv2 - 100
block11_sepconv2_bn - 101
block11_sepconv3_act - 102
block11_sepconv3 - 103
block11_sepconv3_bn - 104
add_10 - 105
block12_sepconv1_act - 106
block12_sepconv1 - 107
block12_sepconv1_bn - 108
block12_sepconv2_act - 109
block12_sepconv2 - 110
block12_sepconv2_bn - 111
block12_sepconv3_act - 112
block12_sepconv3 - 113
block12_sepconv3_bn - 114
add_11 - 115
block13_sepconv1_act - 116
block13_sepconv1 - 117
block13_sepconv1_bn - 118
block13_sepconv2_act - 119
block13_sepconv2 - 120
block13_sepconv2_bn - 121
conv2d_4 - 122
block13_pool - 123
batch_normalization_4 - 124
add_12 - 125
block14_sepconv1 - 126
block14_sepconv1_bn - 127
block14_sepconv1_act - 128
block14_sepconv2 - 129
block14_sepconv2_bn - 130
block14_sepconv2_act - 131
global_average_pooling2d_1 - 132
dropout_1 - 133
dense_1 - 134
In [16]:
import matplotlib.pyplot as plt
import random
%matplotlib inline
%config InlineBackend.figure_format = 'retina'

def show_heatmap_image(model_show, weights_show):
    test_dir = os.path.join(basedir,  "test", "test" )
    image_files = glob.glob(os.path.join(test_dir,"*"))
    print(len(image_files))
    
    plt.figure(figsize=(12, 14))
    for i in range(16):
        plt.subplot(4, 4, i+1)
        img = cv2.imread(image_files[2000*i+113])
        img = cv2.resize(img, (299, 299))
        x = img.copy()
        x.astype(np.float32)
        out, predictions = model_show.predict(np.expand_dims(x, axis=0))
        predictions = predictions[0]
        out = out[0]
        
        max_idx = np.argmax(predictions)
        prediction = predictions[max_idx]
        
        status = ["safe driving",  " texting - right",  "phone - right",  "texting - left",  "phone - left", 
                  "operation radio", "drinking", "reaching behind", "hair and makeup", "talking"]

        plt.title('c%d |%s| %.2f%%' % (max_idx , status[max_idx], prediction*100))
    
        cam = (prediction - 0.5) * np.matmul(out, weights_show)
        cam = cam[:,:,max_idx]
        cam -= cam.min()
        cam /= cam.max()
        cam -= 0.2
        cam /= 0.8

        cam = cv2.resize(cam, (299, 299))
        heatmap = cv2.applyColorMap(np.uint8(255*cam), cv2.COLORMAP_JET)
        heatmap[np.where(cam <= 0.2)] = 0

        out = cv2.addWeighted(img, 0.8, heatmap, 0.4, 0)

        plt.axis('off')
        plt.imshow(out[:,:,::-1])
print("done")
done
In [17]:
weights = model.layers[134].get_weights()[0]
layer_output = model.layers[131].output
model2 = Model(model.input, [layer_output, model.output])
print("layer_output {0}".format(layer_output))
print("weights shape {0}".format(weights.shape))
show_heatmap_image(model2, weights)
layer_output Tensor("block14_sepconv2_act_1/Relu:0", shape=(?, 10, 10, 2048), dtype=float32)
weights shape (2048, 10)
79726
In [18]:
weights = model.layers[134].get_weights()[0]
layer_output = model.layers[130].output
model2 = Model(model.input, [layer_output, model.output])
print("layer_output {0}".format(layer_output))
print("weights shape {0}".format(weights.shape))
show_heatmap_image(model2, weights)
layer_output Tensor("block14_sepconv2_bn_1/cond/Merge:0", shape=(?, 10, 10, 2048), dtype=float32)
weights shape (2048, 10)
79726
In [19]:
weights = model.layers[134].get_weights()[0]
layer_output = model.layers[129].output
model2 = Model(model.input, [layer_output, model.output])
print("layer_output {0}".format(layer_output))
print("weights shape {0}".format(weights.shape))
show_heatmap_image(model2, weights)
layer_output Tensor("block14_sepconv2_1/separable_conv2d:0", shape=(?, 10, 10, 2048), dtype=float32)
weights shape (2048, 10)
79726
In [ ]: